Tensors

Warm-up: numpy


In [2]:
# -*- coding: utf-8 -*-
import numpy as np

In [3]:
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.

N, D_in, H, D_out = 64, 1000, 100, 10

In [4]:
# Create random input and output data
x = np.random.randn(N, D_in)
y = np.random.randn(N, D_out)

In [5]:
# Randomly initialize weights
w1 = np.random.randn(D_in, H)
w2 = np.random.randn(H, D_out)

In [6]:
learning_rate = 1e-6
for t in range(500):
    # Forward pass: compute predicted y
    h = x.dot(w1)
    h_relu = np.maximum(h, 0)
    y_pred = h_relu.dot(w2)

    # Compute and print loss
    loss = np.square(y_pred - y).sum()
    print(t, loss)

    # Backprop to compute gradients of w1 and w2 with respect to loss
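    # d(loss)/d(y_pred) = 2 * (y_pred - y), since loss = sum((y_pred - y)**2)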
    grad_y_pred = 2.0 * (y_pred - y)
    grad_w2 = h_relu.T.dot(grad_y_pred)
    grad_h_relu = grad_y_pred.dot(w2.T)
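    # ReLU passes gradients through only where its input was positive,
    # so zero out the entries of grad_h where h < 0.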
    grad_h = grad_h_relu.copy()
    grad_h[h < 0] = 0
    grad_w1 = x.T.dot(grad_h)

    # Update weights
    w1 -= learning_rate * grad_w1
    w2 -= learning_rate * grad_w2


0 38956746.569864005
1 32209686.75252662
2 24228914.0350871
3 15877890.182624102
4 9390301.733084235
...
497 5.35985247423456e-09
498 5.0598309393080365e-09
499 4.776654929947531e-09
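
The backprop formulas above can be sanity-checked numerically. Below is a minimal sketch of a central-difference gradient check; the small sizes (N=4, D_in=5, H=3, D_out=2) are hypothetical, chosen only so the check runs instantly. It compares one entry of the analytic grad_w2 against a finite-difference estimate.

import numpy as np

np.random.seed(0)
N, D_in, H, D_out = 4, 5, 3, 2  # hypothetical small sizes for the check
x = np.random.randn(N, D_in)
y = np.random.randn(N, D_out)
w1 = np.random.randn(D_in, H)
w2 = np.random.randn(H, D_out)

def loss_fn(w1, w2):
    h_relu = np.maximum(x.dot(w1), 0)
    return np.square(h_relu.dot(w2) - y).sum()

# Analytic gradient of w2, exactly as in the training loop above
h_relu = np.maximum(x.dot(w1), 0)
grad_w2 = h_relu.T.dot(2.0 * (h_relu.dot(w2) - y))

# Central-difference estimate for a single entry of w2
eps = 1e-6
w2_plus, w2_minus = w2.copy(), w2.copy()
w2_plus[0, 0] += eps
w2_minus[0, 0] -= eps
numeric = (loss_fn(w1, w2_plus) - loss_fn(w1, w2_minus)) / (2 * eps)
print(grad_w2[0, 0], numeric)  # the two values should agree closely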

PyTorch: Tensors


In [7]:
# -*- coding: utf-8 -*-
import torch

In [8]:
dtype = torch.float
device = torch.device("cpu")
# device = torch.device("cuda:0") # Uncomment this to run on GPU

In [9]:
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.

N, D_in, H, D_out = 64, 1000, 100, 10

In [10]:
# Create random input and output data
x = torch.randn(N, D_in, device=device, dtype=dtype)
y = torch.randn(N, D_out, device=device, dtype=dtype)

In [11]:
# Randomly initialize weights
w1 = torch.randn(D_in, H, device=device, dtype=dtype)
w2 = torch.randn(H, D_out, device=device, dtype=dtype)

In [12]:
learning_rate = 1e-6
for t in range(500):
    # Forward pass: compute predicted y
    h = x.mm(w1)
    h_relu = h.clamp(min=0)
    y_pred = h_relu.mm(w2)

    # Compute and print loss
    loss = (y_pred - y).pow(2).sum().item()
    print(t, loss)

    # Backprop to compute gradients of w1 and w2 with respect to loss
    grad_y_pred = 2.0 * (y_pred - y)
    grad_w2 = h_relu.t().mm(grad_y_pred)
    grad_h_relu = grad_y_pred.mm(w2.t())
    grad_h = grad_h_relu.clone()
    grad_h[h < 0] = 0
    grad_w1 = x.t().mm(grad_h)

    # Update weights using gradient descent
    w1 -= learning_rate * grad_w1
    w2 -= learning_rate * grad_w2


0 35037344.0
1 30013722.0
2 26158256.0
3 20697328.0
4 14416138.0
...
497 7.848977838875726e-05
498 7.755881961202249e-05
499 7.631465996382758e-05
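
The Tensor API above deliberately mirrors numpy (mm for dot, clamp(min=0) for np.maximum(h, 0)), and the two interoperate. A minimal sketch; note that torch.from_numpy shares memory with the source array on CPU rather than copying it.

import numpy as np
import torch

a = np.random.randn(3, 4)
t = torch.from_numpy(a)                   # wraps the same memory, no copy
b = t.clamp(min=0).numpy()                # back to numpy
print(np.allclose(b, np.maximum(a, 0)))   # True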

PyTorch: Tensors and autograd


In [13]:
# -*- coding: utf-8 -*-
import torch

In [14]:
dtype = torch.float
device = torch.device("cpu")
# device = torch.device("cuda:0") # Uncomment this to run on GPU

In [15]:
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10

In [16]:
# Create random Tensors to hold input and outputs.
# Setting requires_grad=False indicates that we do not need to compute gradients
# with respect to these Tensors during the backward pass.
x = torch.randn(N, D_in, device=device, dtype=dtype)
y = torch.randn(N, D_out, device=device, dtype=dtype)

In [17]:
# Create random Tensors for weights.
# Setting requires_grad=True indicates that we want to compute gradients with
# respect to these Tensors during the backward pass.
w1 = torch.randn(D_in, H, device=device, dtype=dtype, requires_grad=True)
w2 = torch.randn(H, D_out, device=device, dtype=dtype, requires_grad=True)

In [18]:
learning_rate = 1e-6
for t in range(500):
    # Forward pass: compute predicted y using operations on Tensors; these
    # are exactly the same operations we used to compute the forward pass using
    # Tensors, but we do not need to keep references to intermediate values since
    # we are not implementing the backward pass by hand.
    y_pred = x.mm(w1).clamp(min=0).mm(w2)

    # Compute and print loss using operations on Tensors.
    # Now loss is a zero-dimensional Tensor;
    # loss.item() gets the scalar value held in the loss.
    loss = (y_pred - y).pow(2).sum()
    print(t, loss.item())

    # Use autograd to compute the backward pass. This call will compute the
    # gradient of loss with respect to all Tensors with requires_grad=True.
    # After this call w1.grad and w2.grad will be Tensors holding the gradient
    # of the loss with respect to w1 and w2 respectively.
    loss.backward()

    # Manually update weights using gradient descent. Wrap in torch.no_grad()
    # because weights have requires_grad=True, but we don't need to track this
    # in autograd.
    # An alternative way is to operate on weight.data and weight.grad.data.
    # Recall that tensor.data gives a tensor that shares the storage with
    # tensor, but doesn't track history.
    # You can also use torch.optim.SGD to achieve this.
    with torch.no_grad():
        w1 -= learning_rate * w1.grad
        w2 -= learning_rate * w2.grad

        # Manually zero the gradients after updating weights
        w1.grad.zero_()
        w2.grad.zero_()


0 31369276.0
1 31981216.0
2 36917224.0
3 39131140.0
4 33450786.0
...
497 9.111754479818046e-05
498 8.961146522779018e-05
499 8.870100282365456e-05
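
As the comment in the update step notes, torch.optim.SGD can take over the manual weight update and gradient zeroing. A minimal sketch of the same loop using the optimizer, reusing x, y, w1, and w2 as defined above:

optimizer = torch.optim.SGD([w1, w2], lr=1e-6)
for t in range(500):
    y_pred = x.mm(w1).clamp(min=0).mm(w2)
    loss = (y_pred - y).pow(2).sum()

    optimizer.zero_grad()   # replaces the manual w.grad.zero_() calls
    loss.backward()
    optimizer.step()        # applies w -= lr * w.grad for plain SGD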

New Autograd functions


In [19]:
# -*- coding: utf-8 -*-
import torch


class MyReLU(torch.autograd.Function):
    """
    We can implement our own custom autograd Functions by subclassing
    torch.autograd.Function and implementing the forward and backward passes
    which operate on Tensors.
    """

    @staticmethod
    def forward(ctx, input):
        """
        In the forward pass we receive a Tensor containing the input and return
        a Tensor containing the output. ctx is a context object that can be used
        to stash information for backward computation. You can cache tensors
        for use in the backward pass using the ctx.save_for_backward method.
        """
        ctx.save_for_backward(input)
        return input.clamp(min=0)

    @staticmethod
    def backward(ctx, grad_output):
        """
        In the backward pass we receive a Tensor containing the gradient of the loss
        with respect to the output, and we need to compute the gradient of the loss
        with respect to the input.
        """
        input, = ctx.saved_tensors
        grad_input = grad_output.clone()
        grad_input[input < 0] = 0
        return grad_input
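
Before training with a custom Function, its backward can be verified numerically with torch.autograd.gradcheck. A minimal sketch; gradcheck expects small double-precision inputs, and may occasionally flag random points that land exactly on ReLU's kink at zero.

from torch.autograd import gradcheck

test_input = torch.randn(8, 6, dtype=torch.double, requires_grad=True)
print(gradcheck(MyReLU.apply, (test_input,), eps=1e-6, atol=1e-4))  # True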

In [20]:
dtype = torch.float
device = torch.device("cpu")
# device = torch.device("cuda:0") # Uncomment this to run on GPU

In [21]:
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10

In [22]:
# Create random Tensors to hold input and outputs.
x = torch.randn(N, D_in, device=device, dtype=dtype)
y = torch.randn(N, D_out, device=device, dtype=dtype)

# Create random Tensors for weights.
w1 = torch.randn(D_in, H, device=device, dtype=dtype, requires_grad=True)
w2 = torch.randn(H, D_out, device=device, dtype=dtype, requires_grad=True)

In [23]:
learning_rate = 1e-6
for t in range(500):
    # To apply our Function, we use the Function.apply method. We alias this as 'relu'.
    relu = MyReLU.apply

    # Forward pass: compute predicted y using operations; we compute
    # ReLU using our custom autograd operation.
    y_pred = relu(x.mm(w1)).mm(w2)

    # Compute and print loss
    loss = (y_pred - y).pow(2).sum()
    print(t, loss.item())

    # Use autograd to compute the backward pass.
    loss.backward()

    # Update weights using gradient descent
    with torch.no_grad():
        w1 -= learning_rate * w1.grad
        w2 -= learning_rate * w2.grad

        # Manually zero the gradients after updating weights
        w1.grad.zero_()
        w2.grad.zero_()


0 28899014.0
1 22329404.0
2 21632238.0
3 22928274.0
4 23815748.0
...
497 5.068121390650049e-05
498 5.015898932470009e-05
499 4.951158189214766e-05
